# Interactive COVID-19 visualization of the United States.
# Builds side-by-side Altair maps of confirmed cases per state for 2020 and 2021,
# with states coloured by one of five US regions.
## generic imports
import altair as alt
import pandas as pd
from vega_datasets import data
import glob
from os import listdir,path
from pathlib import Path
# Topology feature for US counties (not used by the maps below, kept for parity).
counties = alt.topo_feature(data.us_10m.url, 'counties')
# used for world visualization later
# countries = alt.topo_feature(data.world_110m.url, 'countries')
source = data.unemployment.url

# Single-day smoke test: open one daily report from
# 'csse_covid_19_daily_reports_us' before processing the full directories below.
filePath = './COVID-19-master/csse_covid_19_data/csse_covid_19_daily_reports_us/01-01-2021.csv'
# Derive the filename from filePath instead of repeating the literal.
filename = path.basename(filePath)
print(filename)

df = pd.read_csv(filePath)  # read_csv takes the path directly; no '%s' formatting needed
# The report filename encodes the date (MM-DD-YYYY).
df["date"] = filename.replace('.csv', '')

# Tag each row with one of five US regions ('Not a region' for territories etc.).
df['region'] = [fiveRegion(x) for x in df['Province_State']]
df['FIPS'] = [int(x) for x in df['FIPS']]
# cleanup
df = df.rename({"Province_State": "State", "Country_Region": "Country"}, axis='columns')
# Delete rows where region is 'Not a region' by selecting only real regions.
df = df.loc[df["region"] != 'Not a region']
# NOTE(review): find_csv_filenames and directory_in_str are defined elsewhere
# in the notebook — presumably a suffix-filtered listdir; confirm.
csvlist21 = find_csv_filenames(directory_in_str, "2021.csv")
csvlist20 = find_csv_filenames(directory_in_str, "2020.csv")
# Prefix each bare filename with the reports directory to form a loadable path.
csvlist21new = [directory_in_str + s for s in csvlist21]
csvlist20new = [directory_in_str + s for s in csvlist20]
# print(csvlist)
data21 = [] # pd.concat takes a list of dataframes as an argument
data20 = [] # pd.concat takes a list of dataframes as an argument
#https://stackoverflow.com/questions/41857659/python-pandas-add-filename-column-csv
#https://stackoverflow.com/questions/9234560/find-all-csv-files-in-a-directory-using-python
def _tag_with_date(frame, csv_path):
    """Attach date columns derived from a MM-DD-YYYY.csv report filename.

    Adds 'date', 'month', 'day', 'year' and a 'year-month-day' string
    column 'ymd' (same value on every row of a single daily file).
    """
    cur_date = path.basename(csv_path).replace('.csv', '')
    frame['date'] = cur_date
    month, day, year = cur_date.split('-')
    frame['month'] = month
    frame['day'] = day
    frame['year'] = year
    # Scalar join — the three parts are constant strings for this file, so
    # this equals the original per-row apply/join without the row loop.
    frame['ymd'] = '-'.join([year, month, day])
    # frame['ymd'] = pd.to_datetime(frame['ymd'])
    return frame


def _load_daily_frames(csv_paths):
    """Read each daily-report CSV and tag it with its filename date."""
    return [_tag_with_date(pd.read_csv(p), p) for p in csv_paths]


# Same treatment for both years (previously two duplicated loops).
data21 = _load_daily_frames(csvlist21new)
data20 = _load_daily_frames(csvlist20new)
# combine all csvs into one csv
# add date to each respective file
def _clean_report(frames):
    """Concatenate one year's daily frames and normalise them.

    - assigns a five-region label from Province_State via fiveRegion()
    - drops rows labelled 'Not a region' (territories, cruise ships)
    - coerces FIPS to int
    - renames Province_State/Country_Region to State/Country
    """
    combined = pd.concat(frames, ignore_index=True)  # don't align row indexes
    combined['region'] = [fiveRegion(s) for s in combined['Province_State']]
    # Keep only rows that map to a real US region.
    combined = combined.loc[combined['region'] != 'Not a region']
    combined['FIPS'] = combined['FIPS'].astype(int)
    return combined.rename(
        {"Province_State": "State", "Country_Region": "Country"}, axis='columns'
    )


# Same cleanup for both years (previously two duplicated sections).
bigframe21 = _clean_report(data21)
bigframe20 = _clean_report(data20)
# df.to_csv("total.csv", index=False)
# Five-colour sequential scale keyed by region.
# NOTE(review): the circle layers below use a viridis scheme instead, so this
# encoding is currently unused — kept for parity.
colorBrewer = alt.Color(
    'region:N',
    scale=alt.Scale(
        domain=['Midwest', 'Northeast', 'Southeast', 'Southwest', 'West'],
        range=['#edf8fb', '#b2e2e2', '#66c2a4', '#2ca25f', '#006d2c'],
    ),
)

# US state outlines, drawn as the base layer under both years' circles.
states = alt.topo_feature(data.us_10m.url, 'states')
map1 = (
    alt.Chart(states)
    .mark_geoshape(fill='#e8e4f3', stroke='black', strokeWidth=1)
    .project(type='albersUsa')
    .properties(width=1000, height=650)
)
# map1
# Legend-bound state selection. NOTE(review): defined but the opacity binding
# that would use it is commented out in the layers below.
click = alt.selection_single(fields=['State'], bind='legend')


def _confirmed_layer(frame, title):
    """Build one year's circle layer of confirmed cases per state.

    Circles sit at each state's lat/long, sized by Confirmed count and
    coloured by region; 'Not a region' rows are filtered out again here
    as a belt-and-braces guard.
    """
    return alt.Chart(frame).mark_circle().transform_filter(
        # {'not': alt.FieldOneOfPredicate(field='Province_State', oneOf=["Diamond Princess", "Grand Princess", "Puerto Rico", "Guam", "Virgin Islands"])}
        {'not': alt.FieldEqualPredicate(field='region', equal="Not a region")}
    ).encode(
        longitude='Long_:Q',
        latitude='Lat:Q',
        color=alt.Color('region:N', scale=alt.Scale(scheme='viridis')),
        size=alt.Size(
            'Confirmed',
            # scale=alt.Scale(domain=[-1, 200], range=[10,400])
            scale=alt.Scale(range=[100, 3000]),
            legend=None,
        ),
        tooltip=['State:N', 'Confirmed:Q', 'Deaths:Q'],
        # opacity=alt.condition(click, alt.value(1), alt.value(0.3))
    ).properties(width=1000, height=650, title=title)


# Same chart spec for both years (previously two duplicated definitions).
covid21 = _confirmed_layer(bigframe21, 'Confirmed Cases 2021')
covid20 = _confirmed_layer(bigframe20, 'Confirmed Cases 2020')
# Overlay each year's circles on the shared base map (`chart + chart` is
# shorthand for alt.layer).
map21 = alt.layer(map1, covid21)
map20 = alt.layer(map1, covid20)
# Side-by-side comparison: 2020 on the left, 2021 on the right.
alt.hconcat(map20, map21)